obj-y += head.o
-head.o: head.S trampoline.S $(TARGET_SUBARCH).S
+head.o: head.S $(TARGET_SUBARCH).S trampoline.S mem.S
.text
.code32
-#define SYM_PHYS(sym) ((sym) - __XEN_VIRT_START)
-#define SYM_TRAMP_PHYS(sym) ((sym) - trampoline_start + BOOT_TRAMPOLINE)
+#undef bootsym_phys
+#define sym_phys(sym) ((sym) - __XEN_VIRT_START)
+#define bootsym_phys(sym) ((sym) - trampoline_start + BOOT_TRAMPOLINE)
-#define TRAMP_CS32 0x0008
-#define TRAMP_CS64 0x0010
-#define TRAMP_DS 0x0018
+#define BOOT_CS32 0x0008
+#define BOOT_CS64 0x0010
+#define BOOT_DS 0x0018
+#define BOOT_PSEUDORM_CS 0x0020
+#define BOOT_PSEUDORM_DS 0x0028
ENTRY(start)
jmp __start
/* Checksum: must be the negated sum of the first two fields. */
.long -(MULTIBOOT_HEADER_MAGIC + MULTIBOOT_HEADER_FLAGS)
+ .section .init.text
+
.Lbad_cpu_msg: .asciz "ERR: Not a 64-bit CPU!"
.Lbad_ldr_msg: .asciz "ERR: Not a Multiboot bootloader!"
bad_cpu:
- mov $(SYM_PHYS(.Lbad_cpu_msg)),%esi # Error message
+ mov $(sym_phys(.Lbad_cpu_msg)),%esi # Error message
jmp print_err
not_multiboot:
- mov $(SYM_PHYS(.Lbad_ldr_msg)),%esi # Error message
+ mov $(sym_phys(.Lbad_ldr_msg)),%esi # Error message
print_err:
mov $0xB8000,%edi # VGA framebuffer
1: mov (%esi),%bl
jmp 1b
gdt_boot_descr:
- .word 4*8-1
- .long SYM_PHYS(trampoline_gdt)
+ .word 6*8-1
+ .long sym_phys(trampoline_gdt)
__start:
cld
cli
/* Initialise GDT and basic data segments. */
- lgdt %cs:SYM_PHYS(gdt_boot_descr)
- mov $TRAMP_DS,%ecx
+ lgdt %cs:sym_phys(gdt_boot_descr)
+ mov $BOOT_DS,%ecx
mov %ecx,%ds
mov %ecx,%es
jne not_multiboot
/* Save the Multiboot info structure for later use. */
- mov %ebx,SYM_PHYS(multiboot_ptr)
+ mov %ebx,sym_phys(multiboot_ptr)
/* Initialize BSS (no nasty surprises!) */
- mov $SYM_PHYS(__bss_start),%edi
- mov $SYM_PHYS(_end),%ecx
+ mov $sym_phys(__bss_start),%edi
+ mov $sym_phys(_end),%ecx
sub %edi,%ecx
xor %eax,%eax
rep stosb
jbe 1f
mov $0x80000001,%eax
cpuid
-1: mov %edx,SYM_PHYS(cpuid_ext_features)
+1: mov %edx,sym_phys(cpuid_ext_features)
#if defined(__x86_64__)
/* Check for availability of long mode. */
bt $29,%edx
jnc bad_cpu
/* Initialise L2 identity-map and xen page table entries (16MB). */
- mov $SYM_PHYS(l2_identmap),%edi
- mov $SYM_PHYS(l2_xenmap),%esi
+ mov $sym_phys(l2_identmap),%edi
+ mov $sym_phys(l2_xenmap),%esi
mov $0x1e3,%eax /* PRESENT+RW+A+D+2MB+GLOBAL */
mov $8,%ecx
1: mov %eax,(%edi)
add $(1<<L2_PAGETABLE_SHIFT),%eax
loop 1b
/* Initialise L3 identity-map page directory entries. */
- mov $SYM_PHYS(l3_identmap),%edi
- mov $(SYM_PHYS(l2_identmap)+7),%eax
+ mov $sym_phys(l3_identmap),%edi
+ mov $(sym_phys(l2_identmap)+7),%eax
mov $4,%ecx
1: mov %eax,(%edi)
add $8,%edi
add $PAGE_SIZE,%eax
loop 1b
/* Initialise L3 xen-map page directory entry. */
- mov $(SYM_PHYS(l2_xenmap)+7),%eax
- mov %eax,SYM_PHYS(l3_xenmap) + (50*8)
+ mov $(sym_phys(l2_xenmap)+7),%eax
+ mov %eax,sym_phys(l3_xenmap) + (50*8)
/* Hook indentity-map and xen-map L3 tables into PML4. */
- mov $(SYM_PHYS(l3_identmap)+7),%eax
- mov %eax,SYM_PHYS(idle_pg_table) + ( 0*8) /* PML4[ 0]: 1:1 map */
- mov %eax,SYM_PHYS(idle_pg_table) + (262*8) /* PML4[262]: 1:1 map */
- mov $(SYM_PHYS(l3_xenmap)+7),%eax
- mov %eax,SYM_PHYS(idle_pg_table) + (261*8) /* PML4[261]: xen map */
+ mov $(sym_phys(l3_identmap)+7),%eax
+ mov %eax,sym_phys(idle_pg_table) + ( 0*8) /* PML4[ 0]: 1:1 map */
+ mov %eax,sym_phys(idle_pg_table) + (262*8) /* PML4[262]: 1:1 map */
+ mov $(sym_phys(l3_xenmap)+7),%eax
+ mov %eax,sym_phys(idle_pg_table) + (261*8) /* PML4[261]: xen map */
#elif defined(CONFIG_X86_PAE)
/* Initialize low and high mappings of memory with 2MB pages */
- mov $SYM_PHYS(idle_pg_table_l2),%edi
+ mov $sym_phys(idle_pg_table_l2),%edi
mov $0xe3,%eax /* PRESENT+RW+A+D+2MB */
1: mov %eax,__PAGE_OFFSET>>18(%edi) /* high mapping */
stosl /* low mapping */
jne 1b
#else
/* Initialize low and high mappings of memory with 4MB pages */
- mov $SYM_PHYS(idle_pg_table),%edi
+ mov $sym_phys(idle_pg_table),%edi
mov $0xe3,%eax /* PRESENT+RW+A+D+4MB */
1: mov %eax,__PAGE_OFFSET>>20(%edi) /* high mapping */
stosl /* low mapping */
#endif
/* Copy bootstrap trampoline to low memory, below 1MB. */
- mov $SYM_PHYS(trampoline_start),%esi
- mov $SYM_TRAMP_PHYS(trampoline_start),%edi
+ mov $sym_phys(trampoline_start),%esi
+ mov $bootsym_phys(trampoline_start),%edi
mov $trampoline_end - trampoline_start,%ecx
rep movsb
- /* EBX == 0 indicates we are the BP (Boot Processor). */
- xor %ebx,%ebx
-
/* Jump into the relocated trampoline. */
- jmp $TRAMP_CS32,$SYM_TRAMP_PHYS(trampoline_protmode_entry)
+ jmp $BOOT_CS32,$bootsym_phys(trampoline_boot_cpu_entry)
.globl trampoline_start, trampoline_end
trampoline_start:
#include "trampoline.S"
trampoline_end:
+ .text
__high_start:
#ifdef __x86_64__
#include "x86_64.S"
--- /dev/null
+ .code16
+
+#define SMAP 0x534d4150
+#define E820MAX 128
+
+get_memory_map:
+
+.Lmeme820:
+ xorl %ebx, %ebx # continuation counter
+ movw $bootsym(e820map), %di # point into the whitelist
+ # so we can have the bios
+ # directly write into it.
+
+1: movl $0x0000e820, %eax # e820, upper word zeroed
+ movl $SMAP,%edx # ascii 'SMAP'
+ movl $20,%ecx # size of the e820rec
+ pushw %ds # data record.
+ popw %es
+ int $0x15
+ jc .Lmem88
+
+ cmpl $SMAP,%eax # check the return is `SMAP'
+ jne .Lmem88
+
+ movb bootsym(e820nr),%al # up to 128 entries
+ cmpb $E820MAX,%al
+ jae .Lmem88
+
+ incb bootsym(e820nr)
+ movw %di,%ax
+ addw $20,%ax
+ movw %ax,%di
+ cmpl $0,%ebx # check to see if
+ jne 1b # %ebx is set to EOF
+
+.Lmem88:
+ movb $0x88, %ah
+ int $0x15
+ movw %ax,bootsym(highmem_kb)
+
+.Lmeme801:
+ stc # fix to work around buggy
+ xorw %cx,%cx # BIOSes which don't clear/set
+ xorw %dx,%dx # carry on pass/error of
+ # e801h memory size call
+ # or merely pass cx,dx though
+ # without changing them.
+ movw $0xe801, %ax
+ int $0x15
+ jc .Lint12
+
+ cmpw $0x0, %cx # Kludge to handle BIOSes
+ jne 1f # which report their extended
+ cmpw $0x0, %dx # memory in AX/BX rather than
+ jne 1f # CX/DX. The spec I have read
+ movw %ax, %cx # seems to indicate AX/BX
+ movw %bx, %dx # are more reasonable anyway...
+1: andl $0xffff,%edx # clear sign extend
+ shll $6,%edx # and go from 64k to 1k chunks
+ movl %edx,bootsym(highmem_kb) # store extended memory size
+ andl $0xffff,%ecx # clear sign extend
+ addl %ecx,bootsym(highmem_kb) # and add lower memory into
+
+.Lint12:
+ int $0x12
+ movw %ax,bootsym(lowmem_kb)
+
+ ret
+
+ .globl e820map, e820nr, lowmem_kb, highmem_kb
+e820map:
+ .fill E820MAX*20,1,0
+e820nr:
+ .byte 0
+lowmem_kb:
+ .long 0
+highmem_kb:
+ .long 0
.code16
+/* NB. bootsym() is only usable in real mode, or via BOOT_PSEUDORM_DS. */
+#undef bootsym
+#define bootsym(s) ((s)-trampoline_start)
+
.globl trampoline_realmode_entry
trampoline_realmode_entry:
nop # We use this byte as a progress flag
- movb $0xA5,trampoline_cpu_started - trampoline_start
+ movb $0xA5,bootsym(trampoline_cpu_started)
cld
cli
- lidt %cs:idt_48 - trampoline_start
- lgdt %cs:gdt_48 - trampoline_start
+ lidt %cs:bootsym(idt_48)
+ lgdt %cs:bootsym(gdt_48)
xor %ax, %ax
inc %ax
lmsw %ax # CR0.PE = 1 (enter protected mode)
mov $1,%bl # EBX != 0 indicates we are an AP
jmp 1f
-1: ljmpl $TRAMP_CS32,$SYM_TRAMP_PHYS(trampoline_protmode_entry)
+1: ljmpl $BOOT_CS32,$bootsym_phys(trampoline_protmode_entry)
idt_48: .word 0, 0, 0 # base = limit = 0
-gdt_48: .word 4*8-1
- .long SYM_TRAMP_PHYS(trampoline_gdt)
+gdt_48: .word 6*8-1
+ .long bootsym_phys(trampoline_gdt)
trampoline_gdt:
.quad 0x0000000000000000 /* 0x0000: unused */
.quad 0x00cf9a000000ffff /* 0x0008: ring 0 code, 32-bit mode */
.quad 0x00af9a000000ffff /* 0x0010: ring 0 code, 64-bit mode */
.quad 0x00cf92000000ffff /* 0x0018: ring 0 data */
+ .quad 0x00009a090000ffff /* 0x0020: real-mode code @ 0x90000 */
+ .quad 0x000092090000ffff /* 0x0028: real-mode data @ 0x90000 */
cpuid_ext_features:
.long 0
.code32
trampoline_protmode_entry:
/* Set up a few descriptors: on entry only CS is guaranteed good. */
- mov $TRAMP_DS,%eax
+ mov $BOOT_DS,%eax
mov %eax,%ds
mov %eax,%es
mov %ecx,%cr4
/* Load pagetable base register. */
- mov $SYM_PHYS(idle_pg_table),%eax
- add SYM_TRAMP_PHYS(trampoline_xen_phys_start),%eax
+ mov $sym_phys(idle_pg_table),%eax
+ add bootsym_phys(trampoline_xen_phys_start),%eax
mov %eax,%cr3
#if CONFIG_PAGING_LEVELS != 2
/* Set up EFER (Extended Feature Enable Register). */
- mov SYM_TRAMP_PHYS(cpuid_ext_features),%edi
+ mov bootsym_phys(cpuid_ext_features),%edi
test $0x20100800,%edi /* SYSCALL/SYSRET, No Execute, Long Mode? */
jz .Lskip_efer
movl $MSR_EFER,%ecx
#if defined(__x86_64__)
/* Now in compatibility mode. Long-jump into 64-bit mode. */
- ljmp $TRAMP_CS64,$SYM_TRAMP_PHYS(start64)
+ ljmp $BOOT_CS64,$bootsym_phys(start64)
.code64
start64:
ljmp $(__HYPERVISOR_CS),$__high_start
#endif
+
+ .code32
+trampoline_boot_cpu_entry:
+ /* Load pseudo-real-mode segments. */
+ mov $BOOT_PSEUDORM_DS,%eax
+ mov %eax,%ds
+ mov %eax,%es
+ mov %eax,%fs
+ mov %eax,%gs
+ mov %eax,%ss
+
+ /* Switch to pseudo-rm CS, enter real mode, and flush insn queue. */
+ mov %cr0,%eax
+ dec %eax
+ ljmp $BOOT_PSEUDORM_CS,$bootsym(1f)
+ .code16
+1: mov %eax,%cr0 # CR0.PE = 0 (leave protected mode)
+ jmp 1f
+
+ /* Load proper real-mode values into %cs, %ds, %es and %ss. */
+1: ljmp $(BOOT_TRAMPOLINE>>4),$bootsym(1f)
+1: mov $(BOOT_TRAMPOLINE>>4),%ax
+ mov %ax,%ds
+ mov %ax,%es
+ mov %ax,%ss
+
+    /* Stack grows down from linear 0x92000 (%ss base 0x90000 + %sp 0x2000).
+     * Initialise IDT and enable irqs. */
+ mov $0x2000,%sp
+ lidt bootsym(rm_idt)
+ sti
+
+ /*
+ * Do real-mode work:
+ * 1. Get memory map.
+ */
+ call get_memory_map
+
+ /* Disable irqs before returning to protected mode. */
+ cli
+
+ /* Enter protected mode, and flush insn queue. */
+ xor %ax,%ax
+ inc %ax
+ lmsw %ax # CR0.PE = 1 (enter protected mode)
+ jmp 1f
+
+ /* Load proper protected-mode values into all segment registers. */
+1: ljmpl $BOOT_CS32,$bootsym_phys(1f)
+ .code32
+1: mov $BOOT_DS,%eax
+ mov %eax,%ds
+ mov %eax,%es
+ mov %eax,%fs
+ mov %eax,%gs
+ mov %eax,%ss
+
+ /* EBX == 0 indicates we are the BP (Boot Processor). */
+ xor %ebx,%ebx
+
+ /* Jump to the common bootstrap entry point. */
+ jmp trampoline_protmode_entry
+
+rm_idt: .word 256*4-1, 0, 0
+
+#include "mem.S"
.word 0
nopaging_gdt_descr:
.word LAST_RESERVED_GDT_BYTE
- .long SYM_PHYS(gdt_table) - FIRST_RESERVED_GDT_BYTE
+ .long sym_phys(gdt_table) - FIRST_RESERVED_GDT_BYTE
.align PAGE_SIZE, 0
/* NB. Rings != 0 get access up to MACH2PHYS_VIRT_END. This allows access to */
#ifdef CONFIG_X86_PAE
.align 32
ENTRY(idle_pg_table)
- .long SYM_PHYS(idle_pg_table_l2) + 0*PAGE_SIZE + 0x01, 0
- .long SYM_PHYS(idle_pg_table_l2) + 1*PAGE_SIZE + 0x01, 0
- .long SYM_PHYS(idle_pg_table_l2) + 2*PAGE_SIZE + 0x01, 0
- .long SYM_PHYS(idle_pg_table_l2) + 3*PAGE_SIZE + 0x01, 0
+ .long sym_phys(idle_pg_table_l2) + 0*PAGE_SIZE + 0x01, 0
+ .long sym_phys(idle_pg_table_l2) + 1*PAGE_SIZE + 0x01, 0
+ .long sym_phys(idle_pg_table_l2) + 2*PAGE_SIZE + 0x01, 0
+ .long sym_phys(idle_pg_table_l2) + 3*PAGE_SIZE + 0x01, 0
#endif
if ( opt_xenheap_megabytes > 2048 )
opt_xenheap_megabytes = 2048;
- if ( mbi->flags & MBI_MEMMAP )
+ if ( bootsym(e820nr) != 0 )
+ {
+ e820_raw_nr = bootsym(e820nr);
+ memcpy(e820_raw, bootsym(e820map), e820_raw_nr * sizeof(e820_raw[0]));
+ }
+ else if ( lowmem_kb )
+ {
+ e820_raw[0].addr = 0;
+ e820_raw[0].size = lowmem_kb << 10;
+ e820_raw[0].type = E820_RAM;
+ e820_raw[1].addr = 0x100000;
+ e820_raw[1].size = highmem_kb << 10;
+ e820_raw[1].type = E820_RAM;
+ e820_raw_nr = 2;
+ }
+ else if ( mbi->flags & MBI_MEMMAP )
{
while ( bytes < mbi->mmap_length )
{
*/
if ( (map->base_addr_high == 0) && (map->length_high != 0) )
{
- e820_warn = 1;
+ if ( !e820_warn )
+ {
+ printk("WARNING: Buggy e820 map detected and fixed "
+ "(truncated length fields).\n");
+ e820_warn = 1;
+ }
map->length_high = 0;
}
EARLY_FAIL("Bootloader provided no memory information.\n");
}
- if ( e820_warn )
- printk("WARNING: Buggy e820 map detected and fixed "
- "(truncated length fields).\n");
-
/* Ensure that all E820 RAM regions are page-aligned and -sized. */
for ( i = 0; i < e820_raw_nr; i++ )
{
uint64_t s, e;
+
if ( e820_raw[i].type != E820_RAM )
continue;
s = PFN_UP(e820_raw[i].addr);
/* Select relocation address. */
e = (e - (opt_xenheap_megabytes << 20)) & ~mask;
xen_phys_start = e;
- boot_trampoline_va(trampoline_xen_phys_start) = e;
+ bootsym(trampoline_xen_phys_start) = e;
/*
* Perform relocation to new physical address.
#include <smpboot_hooks.h>
#define set_kernel_exec(x, y) (0)
-#define setup_trampoline() (boot_trampoline_pa(trampoline_realmode_entry))
+#define setup_trampoline() (bootsym_phys(trampoline_realmode_entry))
/* Set if we find a B stepping CPU */
static int __devinitdata smp_b_stepping;
} else {
boot_error = 1;
mb();
- if (boot_trampoline_va(trampoline_cpu_started) == 0xA5)
+ if (bootsym(trampoline_cpu_started) == 0xA5)
/* trampoline started but...? */
printk("Stuck ??\n");
else
}
/* mark "stuck" area as not stuck */
- boot_trampoline_va(trampoline_cpu_started) = 0;
+ bootsym(trampoline_cpu_started) = 0;
mb();
return boot_error;
#define CONFIG_DMA_BITSIZE 32
#define BOOT_TRAMPOLINE 0x90000
-#define boot_trampoline_pa(sym) \
+#define bootsym_phys(sym) \
(((unsigned long)&(sym)-(unsigned long)&trampoline_start)+BOOT_TRAMPOLINE)
-#define boot_trampoline_va(sym) \
- (*RELOC_HIDE((typeof(&(sym)))__va(__pa(&(sym))), \
+#define bootsym(sym) \
+ (*RELOC_HIDE((typeof(&(sym)))__va(__pa(&(sym))), \
BOOT_TRAMPOLINE-__pa(trampoline_start)))
#ifndef __ASSEMBLY__
extern char trampoline_start[], trampoline_end[];
extern unsigned long init_e820(struct e820entry *, int *);
extern struct e820map e820;
+/* These symbols live in the boot trampoline. */
+extern struct e820entry e820map[];
+extern unsigned char e820nr;
+extern unsigned int lowmem_kb, highmem_kb;
+
#endif /*__E820_HEADER*/